In [32]:
from __future__ import print_function
import torch
import torch.nn as nn
import torchvision.utils as utils
from torchvision.utils import save_image
import torch.nn.parallel 
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import numpy as np
import os
import random

import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML
In [5]:
# Set a fixed seed for reproducibility
manualSeed = 999
random.seed(manualSeed)
torch.manual_seed(manualSeed)
Out[5]:
<torch._C.Generator at 0x11704ab70>
In [6]:
dataroot = "/celeba-dataset/"   # root directory of the dataset
workers = 2                     # number of DataLoader worker processes
batch_size = 64                 # batch size during training
image_size = 64                 # spatial size of the training images
nc = 3                          # number of color channels in the input images
nz = 100                        # size of the generator's latent vector z
ngf = 64                        # base number of feature maps in the generator
ndf = 64                        # base number of feature maps in the discriminator
num_epochs = 5                  # number of training epochs
lr = 0.0002                     # learning rate for both Adam optimizers
beta1 = 0.5                     # beta1 hyperparameter for Adam
ngpu = 1                        # number of GPUs to use (0 forces CPU); needed by the device selection below
In [8]:
dataset = dset.ImageFolder(root=dataroot,
                           transform=transforms.Compose([
                               transforms.Resize(image_size),
                               transforms.CenterCrop(image_size),
                               transforms.ToTensor(),
                               transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
                           ]))
dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size,
                                         shuffle=True, num_workers=workers)
device = torch.device("cuda:0" if (torch.cuda.is_available() and ngpu > 0) else "cpu")
In [10]:
# Plot some training images
real_batch = next(iter(dataloader))
plt.figure(figsize=(8,8))
plt.axis("off")
plt.title("Training Images")
plt.imshow(np.transpose(utils.make_grid(real_batch[0].to(device)[:64], padding=2, normalize=True).cpu(),(1,2,0)))
Out[10]:
<matplotlib.image.AxesImage at 0x1258ad518>
In [11]:
# Custom weight initialization, applied to both networks via .apply().
# Following the DCGAN paper, weights are drawn from N(0, 0.02).
def weights_init(m):
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)
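
As a quick sanity check (an added sketch, not an original cell), the initializer can be applied to a standalone layer and the weight statistics inspected; the standard deviation should land near 0.02:

layer = nn.Conv2d(3, 8, kernel_size=4)  # hypothetical throwaway layer
weights_init(layer)
print(layer.weight.data.mean().item(), layer.weight.data.std().item())
# expected: mean near 0.0, std near 0.02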
In [13]:
# Generator
class Generator(nn.Module):
    def __init__(self):
        super(Generator, self).__init__()
        self.main = nn.Sequential(
            # input is Z, (nz) x 1 x 1, going into a transposed convolution
            nn.ConvTranspose2d(nz, ngf * 8, 4, 1, 0, bias=False),
            nn.BatchNorm2d(ngf * 8),
            nn.ReLU(True),
            # state size. (ngf*8) x 4 x 4
            nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            # state size. (ngf*4) x 8 x 8
            nn.ConvTranspose2d(ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            # state size. (ngf*2) x 16 x 16
            nn.ConvTranspose2d(ngf * 2, ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            # state size. (ngf) x 32 x 32
            nn.ConvTranspose2d(ngf, nc, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. (nc) x 64 x 64
        )
        
    def forward(self, inputs):
        return self.main(inputs)
In [14]:
generator = Generator().to(device)
generator.apply(weights_init)
print(generator)
Generator(
  (main): Sequential(
    (0): ConvTranspose2d(100, 512, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (2): ReLU(inplace=True)
    (3): ConvTranspose2d(512, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (4): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (5): ReLU(inplace=True)
    (6): ConvTranspose2d(256, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (7): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (8): ReLU(inplace=True)
    (9): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (10): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (11): ReLU(inplace=True)
    (12): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (13): Tanh()
  )
)
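
Each transposed convolution here uses kernel 4, stride 2, padding 1, which doubles the spatial size (out = (in - 1) * stride - 2 * padding + kernel), so the 1x1 latent input grows 4 -> 8 -> 16 -> 32 -> 64. A minimal shape check, added as a sketch rather than an original cell:

with torch.no_grad():
    z = torch.randn(1, nz, 1, 1, device=device)
    print(generator(z).shape)  # expected: torch.Size([1, 3, 64, 64])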
In [15]:
# Discriminator
class Discriminator(nn.Module):
    def __init__(self):
        super(Discriminator, self).__init__()
        self.main = nn.Sequential(
            # input is (nc) x 64 x 64
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf) x 32 x 32
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*2) x 16 x 16
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*4) x 8 x 8
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 8),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*8) x 4 x 4
            nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
        )
        
    def forward(self, inputs):
        return self.main(inputs)
In [16]:
discriminator = Discriminator().to(device)
discriminator.apply(weights_init)
print(discriminator)
Discriminator(
  (main): Sequential(
    (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (1): LeakyReLU(negative_slope=0.2, inplace=True)
    (2): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (3): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (4): LeakyReLU(negative_slope=0.2, inplace=True)
    (5): Conv2d(128, 256, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (6): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (7): LeakyReLU(negative_slope=0.2, inplace=True)
    (8): Conv2d(256, 512, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
    (9): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (10): LeakyReLU(negative_slope=0.2, inplace=True)
    (11): Conv2d(512, 1, kernel_size=(4, 4), stride=(1, 1), bias=False)
    (12): Sigmoid()
  )
)
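
The discriminator mirrors the generator: each strided 4x4 convolution halves the spatial size (64 -> 32 -> 16 -> 8 -> 4), and the final convolution collapses the 4x4 map into a single sigmoid probability. A corresponding shape check (added sketch):

with torch.no_grad():
    x = torch.randn(1, nc, image_size, image_size, device=device)
    print(discriminator(x).view(-1).shape)  # expected: torch.Size([1])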
In [17]:
criterion = nn.BCELoss()
# Fixed batch of latent vectors used to track the generator's progress
fixed_noise = torch.randn(64, nz, 1, 1, device=device)
real_label = 1.
fake_label = 0.
optimizer_D = optim.Adam(discriminator.parameters(), lr=lr, betas=(beta1, 0.999))
optimizer_G = optim.Adam(generator.parameters(), lr=lr, betas=(beta1, 0.999))
In [20]:
# Lists to keep track of progress
img_list = []
G_losses = []
D_losses = []
iters = 0

print("Starting Training Loop.....")
for epoch in range(num_epochs):
    for i, data in enumerate(dataloader, 0):

        # (1) Update the discriminator: maximize log(D(x)) + log(1 - D(G(z)))
        # Train with an all-real batch
        discriminator.zero_grad()
        real = data[0].to(device)
        b_size = real.size(0)
        label = torch.full((b_size,), real_label, dtype=torch.float, device=device)
        output = discriminator(real).view(-1)
        error_d_real = criterion(output, label)
        error_d_real.backward()
        D_x = output.mean().item()

        # Train with an all-fake batch
        noise = torch.randn(b_size, nz, 1, 1, device=device)
        fake = generator(noise)
        label.fill_(fake_label)
        output = discriminator(fake.detach()).view(-1)
        error_d_fake = criterion(output, label)
        error_d_fake.backward()
        d_g_z1 = output.mean().item()
        err_d = error_d_fake + error_d_real
        optimizer_D.step()

        # (2) Update the generator: maximize log(D(G(z)))
        generator.zero_grad()
        label.fill_(real_label)  # fake labels are "real" for the generator loss
        output = discriminator(fake).view(-1)
        error_g = criterion(output, label)
        error_g.backward()
        d_g_z2 = output.mean().item()
        optimizer_G.step()
        
        if i % 50 == 0:
            print('[%d/%d][%d/%d]\tLoss_D: %.4f\tLoss_G: %.4f\tD(x): %.4f\tD(G(z)): %.4f / %.4f'
                  % (epoch, num_epochs, i, len(dataloader),
                     err_d.item(), error_g.item(), D_x, d_g_z1, d_g_z2))

        # Save Losses for plotting later
        G_losses.append(error_g.item())
        D_losses.append(err_d.item())

        # Check how the generator is doing by saving G's output on fixed_noise
        if (iters % 500 == 0) or ((epoch == num_epochs-1) and (i == len(dataloader)-1)):
            with torch.no_grad():
                fake = generator(fixed_noise).detach().cpu()
            img_list.append(utils.make_grid(fake, padding=2, normalize=True))

        iters += 1
Starting Training Loop.....
[0/5][0/3166]	Loss_D: 1.0209	Loss_G: 6.3912	D(x): 0.9475	D(G(z)): 0.5543 / 0.0027
[0/5][50/3166]	Loss_D: 0.1433	Loss_G: 24.0613	D(x): 0.9059	D(G(z)): 0.0000 / 0.0000
[0/5][100/3166]	Loss_D: 0.7484	Loss_G: 13.7534	D(x): 0.9686	D(G(z)): 0.4338 / 0.0000
[0/5][150/3166]	Loss_D: 0.3554	Loss_G: 4.0924	D(x): 0.8597	D(G(z)): 0.1313 / 0.0315
[0/5][200/3166]	Loss_D: 0.4532	Loss_G: 4.1385	D(x): 0.7707	D(G(z)): 0.0430 / 0.0257
[0/5][250/3166]	Loss_D: 2.3998	Loss_G: 4.0158	D(x): 0.2389	D(G(z)): 0.0034 / 0.0461
[0/5][300/3166]	Loss_D: 0.7523	Loss_G: 7.4294	D(x): 0.5824	D(G(z)): 0.0026 / 0.0015
[0/5][350/3166]	Loss_D: 0.6879	Loss_G: 3.8939	D(x): 0.9261	D(G(z)): 0.3743 / 0.0599
[0/5][400/3166]	Loss_D: 0.8441	Loss_G: 4.9909	D(x): 0.8594	D(G(z)): 0.3877 / 0.0233
[0/5][450/3166]	Loss_D: 0.5056	Loss_G: 5.1846	D(x): 0.8656	D(G(z)): 0.2520 / 0.0098
[0/5][500/3166]	Loss_D: 0.3187	Loss_G: 4.2248	D(x): 0.8987	D(G(z)): 0.1632 / 0.0242
[0/5][550/3166]	Loss_D: 0.8588	Loss_G: 9.1239	D(x): 0.9832	D(G(z)): 0.4433 / 0.0008
[0/5][600/3166]	Loss_D: 0.6482	Loss_G: 5.8809	D(x): 0.8936	D(G(z)): 0.3419 / 0.0057
[0/5][650/3166]	Loss_D: 0.7202	Loss_G: 5.0726	D(x): 0.8060	D(G(z)): 0.3292 / 0.0111
[0/5][700/3166]	Loss_D: 0.4369	Loss_G: 4.5565	D(x): 0.7464	D(G(z)): 0.0396 / 0.0162
[0/5][750/3166]	Loss_D: 0.4883	Loss_G: 4.9284	D(x): 0.8265	D(G(z)): 0.1989 / 0.0155
[0/5][800/3166]	Loss_D: 0.5560	Loss_G: 3.0097	D(x): 0.6977	D(G(z)): 0.0625 / 0.0934
[0/5][850/3166]	Loss_D: 1.0523	Loss_G: 2.9294	D(x): 0.5236	D(G(z)): 0.0206 / 0.0940
[0/5][900/3166]	Loss_D: 0.5023	Loss_G: 4.4592	D(x): 0.7641	D(G(z)): 0.1298 / 0.0233
[0/5][950/3166]	Loss_D: 0.4861	Loss_G: 3.7227	D(x): 0.7510	D(G(z)): 0.0776 / 0.0444
[0/5][1000/3166]	Loss_D: 0.8883	Loss_G: 7.5714	D(x): 0.9426	D(G(z)): 0.4693 / 0.0021
[0/5][1050/3166]	Loss_D: 0.5055	Loss_G: 3.2270	D(x): 0.7763	D(G(z)): 0.1267 / 0.0803
[0/5][1100/3166]	Loss_D: 0.5523	Loss_G: 5.6187	D(x): 0.9821	D(G(z)): 0.3519 / 0.0084
[0/5][1150/3166]	Loss_D: 0.5107	Loss_G: 4.5489	D(x): 0.8115	D(G(z)): 0.2053 / 0.0154
[0/5][1200/3166]	Loss_D: 0.4626	Loss_G: 3.5704	D(x): 0.7457	D(G(z)): 0.0910 / 0.0531
[0/5][1250/3166]	Loss_D: 0.7601	Loss_G: 2.6521	D(x): 0.6541	D(G(z)): 0.0679 / 0.1167
[0/5][1300/3166]	Loss_D: 0.4266	Loss_G: 3.5354	D(x): 0.7306	D(G(z)): 0.0640 / 0.0456
[0/5][1350/3166]	Loss_D: 0.8024	Loss_G: 3.5741	D(x): 0.7786	D(G(z)): 0.3157 / 0.0486
[0/5][1400/3166]	Loss_D: 0.5709	Loss_G: 3.6841	D(x): 0.6658	D(G(z)): 0.0467 / 0.0530
[0/5][1450/3166]	Loss_D: 0.5457	Loss_G: 4.8702	D(x): 0.8770	D(G(z)): 0.2921 / 0.0134
[0/5][1500/3166]	Loss_D: 0.4754	Loss_G: 3.2009	D(x): 0.7467	D(G(z)): 0.0461 / 0.0845
[0/5][1550/3166]	Loss_D: 0.4562	Loss_G: 4.2803	D(x): 0.8171	D(G(z)): 0.1666 / 0.0269
[0/5][1600/3166]	Loss_D: 0.4365	Loss_G: 6.1895	D(x): 0.9360	D(G(z)): 0.2718 / 0.0034
[0/5][1650/3166]	Loss_D: 0.8730	Loss_G: 1.9578	D(x): 0.5507	D(G(z)): 0.0193 / 0.2185
[0/5][1700/3166]	Loss_D: 0.3150	Loss_G: 3.4612	D(x): 0.7972	D(G(z)): 0.0406 / 0.0544
[0/5][1750/3166]	Loss_D: 0.4031	Loss_G: 3.3031	D(x): 0.7869	D(G(z)): 0.0903 / 0.0665
[0/5][1800/3166]	Loss_D: 0.6255	Loss_G: 2.7777	D(x): 0.7321	D(G(z)): 0.1562 / 0.0916
[0/5][1850/3166]	Loss_D: 0.2019	Loss_G: 3.6436	D(x): 0.8966	D(G(z)): 0.0689 / 0.0575
[0/5][1900/3166]	Loss_D: 1.9855	Loss_G: 1.1436	D(x): 0.2412	D(G(z)): 0.0035 / 0.4101
[0/5][1950/3166]	Loss_D: 0.8153	Loss_G: 5.8692	D(x): 0.8937	D(G(z)): 0.4322 / 0.0060
[0/5][2000/3166]	Loss_D: 0.3359	Loss_G: 2.7274	D(x): 0.8627	D(G(z)): 0.1330 / 0.0947
[0/5][2050/3166]	Loss_D: 1.1793	Loss_G: 7.0080	D(x): 0.9142	D(G(z)): 0.5740 / 0.0021
[0/5][2100/3166]	Loss_D: 0.5516	Loss_G: 6.5853	D(x): 0.9243	D(G(z)): 0.3287 / 0.0022
[0/5][2150/3166]	Loss_D: 1.9468	Loss_G: 1.2085	D(x): 0.2752	D(G(z)): 0.0119 / 0.3914
[0/5][2200/3166]	Loss_D: 0.6550	Loss_G: 2.9125	D(x): 0.6098	D(G(z)): 0.0273 / 0.0912
[0/5][2250/3166]	Loss_D: 0.6347	Loss_G: 3.2230	D(x): 0.7354	D(G(z)): 0.2053 / 0.0648
[0/5][2300/3166]	Loss_D: 1.7071	Loss_G: 0.5905	D(x): 0.3040	D(G(z)): 0.0156 / 0.6332
[0/5][2350/3166]	Loss_D: 0.4357	Loss_G: 2.9926	D(x): 0.7741	D(G(z)): 0.1045 / 0.0751
[0/5][2400/3166]	Loss_D: 0.8156	Loss_G: 1.0458	D(x): 0.5390	D(G(z)): 0.0557 / 0.4253
[0/5][2450/3166]	Loss_D: 1.1145	Loss_G: 6.6402	D(x): 0.9525	D(G(z)): 0.5811 / 0.0023
[0/5][2500/3166]	Loss_D: 0.7154	Loss_G: 1.8265	D(x): 0.5806	D(G(z)): 0.0233 / 0.2091
[0/5][2550/3166]	Loss_D: 1.0094	Loss_G: 5.3782	D(x): 0.9554	D(G(z)): 0.5304 / 0.0088
[0/5][2600/3166]	Loss_D: 0.6243	Loss_G: 5.1767	D(x): 0.9645	D(G(z)): 0.3879 / 0.0113
[0/5][2650/3166]	Loss_D: 0.4460	Loss_G: 3.0249	D(x): 0.8433	D(G(z)): 0.1716 / 0.0744
[0/5][2700/3166]	Loss_D: 0.4642	Loss_G: 3.1655	D(x): 0.7882	D(G(z)): 0.1463 / 0.0686
[0/5][2750/3166]	Loss_D: 0.6762	Loss_G: 1.8490	D(x): 0.6935	D(G(z)): 0.2015 / 0.1905
[0/5][2800/3166]	Loss_D: 0.3920	Loss_G: 3.0654	D(x): 0.7797	D(G(z)): 0.0970 / 0.0703
[0/5][2850/3166]	Loss_D: 0.5767	Loss_G: 3.7783	D(x): 0.8580	D(G(z)): 0.2899 / 0.0379
[0/5][2900/3166]	Loss_D: 0.4907	Loss_G: 2.5230	D(x): 0.7623	D(G(z)): 0.1390 / 0.1111
[0/5][2950/3166]	Loss_D: 0.7035	Loss_G: 3.2705	D(x): 0.5817	D(G(z)): 0.0133 / 0.0786
[0/5][3000/3166]	Loss_D: 0.5149	Loss_G: 1.9024	D(x): 0.7010	D(G(z)): 0.1030 / 0.1783
[0/5][3050/3166]	Loss_D: 0.6032	Loss_G: 3.4056	D(x): 0.8151	D(G(z)): 0.2791 / 0.0506
[0/5][3100/3166]	Loss_D: 0.6222	Loss_G: 3.2413	D(x): 0.9062	D(G(z)): 0.3554 / 0.0537
[0/5][3150/3166]	Loss_D: 1.5365	Loss_G: 0.5429	D(x): 0.2953	D(G(z)): 0.0122 / 0.6172
[1/5][0/3166]	Loss_D: 0.5676	Loss_G: 3.2743	D(x): 0.6374	D(G(z)): 0.0307 / 0.0806
[1/5][50/3166]	Loss_D: 0.3659	Loss_G: 2.8547	D(x): 0.7926	D(G(z)): 0.0915 / 0.0836
[1/5][100/3166]	Loss_D: 1.3002	Loss_G: 1.7897	D(x): 0.3690	D(G(z)): 0.0094 / 0.2607
[1/5][150/3166]	Loss_D: 0.6861	Loss_G: 2.0652	D(x): 0.6954	D(G(z)): 0.2124 / 0.1614
[1/5][200/3166]	Loss_D: 0.8381	Loss_G: 1.5608	D(x): 0.5382	D(G(z)): 0.0561 / 0.2551
[1/5][250/3166]	Loss_D: 0.4362	Loss_G: 3.8702	D(x): 0.8610	D(G(z)): 0.2200 / 0.0292
[1/5][300/3166]	Loss_D: 0.5510	Loss_G: 2.5061	D(x): 0.6812	D(G(z)): 0.0900 / 0.1330
[1/5][350/3166]	Loss_D: 0.3409	Loss_G: 3.4438	D(x): 0.8227	D(G(z)): 0.1051 / 0.0548
[1/5][400/3166]	Loss_D: 0.5530	Loss_G: 3.3149	D(x): 0.8746	D(G(z)): 0.3116 / 0.0512
[1/5][450/3166]	Loss_D: 0.4249	Loss_G: 2.7839	D(x): 0.7989	D(G(z)): 0.1272 / 0.0863
[1/5][500/3166]	Loss_D: 0.4980	Loss_G: 3.5868	D(x): 0.9113	D(G(z)): 0.3050 / 0.0365
[1/5][550/3166]	Loss_D: 0.8454	Loss_G: 4.6982	D(x): 0.8751	D(G(z)): 0.4576 / 0.0164
[1/5][600/3166]	Loss_D: 0.7136	Loss_G: 3.6914	D(x): 0.8656	D(G(z)): 0.3346 / 0.0451
[1/5][650/3166]	Loss_D: 1.1043	Loss_G: 5.4778	D(x): 0.9391	D(G(z)): 0.5764 / 0.0064
[1/5][700/3166]	Loss_D: 0.5434	Loss_G: 2.2482	D(x): 0.7712	D(G(z)): 0.1976 / 0.1335
[1/5][750/3166]	Loss_D: 0.5210	Loss_G: 2.3608	D(x): 0.7747	D(G(z)): 0.1998 / 0.1227
[1/5][800/3166]	Loss_D: 0.6068	Loss_G: 1.7439	D(x): 0.6542	D(G(z)): 0.0910 / 0.2162
[1/5][850/3166]	Loss_D: 0.6962	Loss_G: 2.7886	D(x): 0.6003	D(G(z)): 0.0499 / 0.1069
[1/5][900/3166]	Loss_D: 1.1576	Loss_G: 1.4866	D(x): 0.4250	D(G(z)): 0.0188 / 0.2959
[1/5][950/3166]	Loss_D: 0.4273	Loss_G: 2.7571	D(x): 0.8470	D(G(z)): 0.2063 / 0.0804
[1/5][1000/3166]	Loss_D: 0.6678	Loss_G: 1.7347	D(x): 0.6794	D(G(z)): 0.1795 / 0.2372
[1/5][1050/3166]	Loss_D: 0.4567	Loss_G: 2.2935	D(x): 0.7732	D(G(z)): 0.1473 / 0.1240
[1/5][1100/3166]	Loss_D: 1.5792	Loss_G: 0.8089	D(x): 0.3026	D(G(z)): 0.0254 / 0.5154
[1/5][1150/3166]	Loss_D: 0.8694	Loss_G: 1.6111	D(x): 0.5796	D(G(z)): 0.1924 / 0.2415
[1/5][1200/3166]	Loss_D: 1.2517	Loss_G: 5.8283	D(x): 0.9719	D(G(z)): 0.6584 / 0.0049
[1/5][1250/3166]	Loss_D: 0.7367	Loss_G: 3.4709	D(x): 0.8534	D(G(z)): 0.3968 / 0.0415
[1/5][1300/3166]	Loss_D: 0.5712	Loss_G: 2.7518	D(x): 0.6912	D(G(z)): 0.1285 / 0.0937
[1/5][1350/3166]	Loss_D: 1.3222	Loss_G: 4.7771	D(x): 0.9417	D(G(z)): 0.6455 / 0.0164
[1/5][1400/3166]	Loss_D: 0.5336	Loss_G: 2.9248	D(x): 0.8676	D(G(z)): 0.2847 / 0.0712
[1/5][1450/3166]	Loss_D: 0.6033	Loss_G: 2.4165	D(x): 0.6867	D(G(z)): 0.1471 / 0.1129
[1/5][1500/3166]	Loss_D: 0.4988	Loss_G: 1.6821	D(x): 0.8104	D(G(z)): 0.2167 / 0.2205
[1/5][1550/3166]	Loss_D: 0.4430	Loss_G: 3.6230	D(x): 0.8107	D(G(z)): 0.1737 / 0.0373
[1/5][1600/3166]	Loss_D: 0.4323	Loss_G: 3.4361	D(x): 0.8569	D(G(z)): 0.2211 / 0.0436
[1/5][1650/3166]	Loss_D: 1.0114	Loss_G: 0.6869	D(x): 0.5489	D(G(z)): 0.1968 / 0.5466
[1/5][1700/3166]	Loss_D: 1.1545	Loss_G: 5.2528	D(x): 0.9473	D(G(z)): 0.5901 / 0.0087
[1/5][1750/3166]	Loss_D: 0.6465	Loss_G: 2.4611	D(x): 0.6109	D(G(z)): 0.0389 / 0.1240
[1/5][1800/3166]	Loss_D: 0.5087	Loss_G: 2.6903	D(x): 0.7308	D(G(z)): 0.1375 / 0.0993
[1/5][1850/3166]	Loss_D: 0.4729	Loss_G: 2.6751	D(x): 0.7431	D(G(z)): 0.1271 / 0.1032
[1/5][1900/3166]	Loss_D: 0.6451	Loss_G: 2.9842	D(x): 0.7920	D(G(z)): 0.2843 / 0.0678
[1/5][1950/3166]	Loss_D: 0.3968	Loss_G: 2.0568	D(x): 0.7600	D(G(z)): 0.0811 / 0.1641
[1/5][2000/3166]	Loss_D: 1.1687	Loss_G: 1.0279	D(x): 0.3846	D(G(z)): 0.0370 / 0.4227
[1/5][2050/3166]	Loss_D: 0.6784	Loss_G: 4.4617	D(x): 0.9058	D(G(z)): 0.3927 / 0.0174
[1/5][2100/3166]	Loss_D: 1.2318	Loss_G: 4.9284	D(x): 0.9727	D(G(z)): 0.6384 / 0.0111
[1/5][2150/3166]	Loss_D: 0.4221	Loss_G: 3.3077	D(x): 0.8339	D(G(z)): 0.1931 / 0.0552
[1/5][2200/3166]	Loss_D: 1.6975	Loss_G: 1.9306	D(x): 0.2543	D(G(z)): 0.0088 / 0.2187
[1/5][2250/3166]	Loss_D: 0.5881	Loss_G: 2.0728	D(x): 0.6936	D(G(z)): 0.1420 / 0.1597
[1/5][2300/3166]	Loss_D: 2.4971	Loss_G: 0.8107	D(x): 0.1377	D(G(z)): 0.0026 / 0.5075
[1/5][2350/3166]	Loss_D: 0.7484	Loss_G: 1.7095	D(x): 0.5983	D(G(z)): 0.1254 / 0.2392
[1/5][2400/3166]	Loss_D: 0.4690	Loss_G: 2.0957	D(x): 0.8233	D(G(z)): 0.2175 / 0.1498
[1/5][2450/3166]	Loss_D: 1.4554	Loss_G: 4.3735	D(x): 0.9510	D(G(z)): 0.6734 / 0.0201
[1/5][2500/3166]	Loss_D: 0.6591	Loss_G: 1.3195	D(x): 0.6246	D(G(z)): 0.1064 / 0.3056
[1/5][2550/3166]	Loss_D: 0.6155	Loss_G: 1.9854	D(x): 0.6324	D(G(z)): 0.0343 / 0.1912
[1/5][2600/3166]	Loss_D: 0.9509	Loss_G: 5.1751	D(x): 0.9819	D(G(z)): 0.5513 / 0.0094
[1/5][2650/3166]	Loss_D: 0.5524	Loss_G: 2.2367	D(x): 0.7083	D(G(z)): 0.1303 / 0.1406
[1/5][2700/3166]	Loss_D: 0.7540	Loss_G: 2.3324	D(x): 0.6227	D(G(z)): 0.1186 / 0.1282
[1/5][2750/3166]	Loss_D: 0.6051	Loss_G: 4.0613	D(x): 0.8973	D(G(z)): 0.3561 / 0.0265
[1/5][2800/3166]	Loss_D: 0.6825	Loss_G: 1.6872	D(x): 0.6364	D(G(z)): 0.1259 / 0.2236
[1/5][2850/3166]	Loss_D: 0.8625	Loss_G: 4.2965	D(x): 0.9049	D(G(z)): 0.4535 / 0.0202
[1/5][2900/3166]	Loss_D: 0.4288	Loss_G: 2.3523	D(x): 0.7499	D(G(z)): 0.1042 / 0.1264
[1/5][2950/3166]	Loss_D: 0.6352	Loss_G: 1.9841	D(x): 0.6821	D(G(z)): 0.1575 / 0.1686
[1/5][3000/3166]	Loss_D: 0.7697	Loss_G: 1.5940	D(x): 0.5409	D(G(z)): 0.0309 / 0.2689
[1/5][3050/3166]	Loss_D: 0.9767	Loss_G: 1.3044	D(x): 0.4363	D(G(z)): 0.0319 / 0.3404
[1/5][3100/3166]	Loss_D: 0.2893	Loss_G: 3.4197	D(x): 0.8056	D(G(z)): 0.0479 / 0.0557
[1/5][3150/3166]	Loss_D: 0.4649	Loss_G: 3.4400	D(x): 0.8200	D(G(z)): 0.1941 / 0.0507
[2/5][0/3166]	Loss_D: 0.6268	Loss_G: 3.5376	D(x): 0.8305	D(G(z)): 0.3078 / 0.0382
[2/5][50/3166]	Loss_D: 0.8402	Loss_G: 4.3434	D(x): 0.8384	D(G(z)): 0.4344 / 0.0214
[2/5][100/3166]	Loss_D: 0.8138	Loss_G: 4.6120	D(x): 0.8447	D(G(z)): 0.4072 / 0.0141
[2/5][150/3166]	Loss_D: 0.5523	Loss_G: 2.5056	D(x): 0.7069	D(G(z)): 0.1291 / 0.1145
[2/5][200/3166]	Loss_D: 0.4301	Loss_G: 3.9633	D(x): 0.8570	D(G(z)): 0.2140 / 0.0258
[2/5][250/3166]	Loss_D: 0.6137	Loss_G: 2.7828	D(x): 0.7924	D(G(z)): 0.2701 / 0.0776
[2/5][300/3166]	Loss_D: 0.6630	Loss_G: 3.0777	D(x): 0.8350	D(G(z)): 0.3401 / 0.0648
[2/5][350/3166]	Loss_D: 0.6957	Loss_G: 2.1541	D(x): 0.7274	D(G(z)): 0.2688 / 0.1465
[2/5][400/3166]	Loss_D: 0.5148	Loss_G: 3.0123	D(x): 0.8378	D(G(z)): 0.2494 / 0.0647
[2/5][450/3166]	Loss_D: 0.5043	Loss_G: 1.7125	D(x): 0.7293	D(G(z)): 0.1293 / 0.2267
[2/5][500/3166]	Loss_D: 1.1194	Loss_G: 0.4880	D(x): 0.4304	D(G(z)): 0.0841 / 0.6490
[2/5][550/3166]	Loss_D: 0.6003	Loss_G: 1.7285	D(x): 0.6583	D(G(z)): 0.1054 / 0.2176
[2/5][600/3166]	Loss_D: 0.5923	Loss_G: 3.5218	D(x): 0.9023	D(G(z)): 0.3433 / 0.0432
[2/5][650/3166]	Loss_D: 0.4469	Loss_G: 2.9664	D(x): 0.7471	D(G(z)): 0.0958 / 0.0720
[2/5][700/3166]	Loss_D: 0.6248	Loss_G: 2.0202	D(x): 0.6925	D(G(z)): 0.1768 / 0.1612
[2/5][750/3166]	Loss_D: 0.7656	Loss_G: 5.3554	D(x): 0.9207	D(G(z)): 0.4313 / 0.0079
[2/5][800/3166]	Loss_D: 0.7688	Loss_G: 1.3960	D(x): 0.5972	D(G(z)): 0.1646 / 0.3043
[2/5][850/3166]	Loss_D: 0.5262	Loss_G: 1.6221	D(x): 0.6897	D(G(z)): 0.1006 / 0.2304
[2/5][900/3166]	Loss_D: 0.3813	Loss_G: 4.5511	D(x): 0.9503	D(G(z)): 0.2528 / 0.0151
[2/5][950/3166]	Loss_D: 0.4073	Loss_G: 3.2329	D(x): 0.8977	D(G(z)): 0.2176 / 0.0562
[2/5][1000/3166]	Loss_D: 0.3837	Loss_G: 3.0642	D(x): 0.8899	D(G(z)): 0.2131 / 0.0593
[2/5][1050/3166]	Loss_D: 0.5776	Loss_G: 4.0231	D(x): 0.9494	D(G(z)): 0.3565 / 0.0267
[2/5][1100/3166]	Loss_D: 0.6230	Loss_G: 3.5856	D(x): 0.9159	D(G(z)): 0.3751 / 0.0388
[2/5][1150/3166]	Loss_D: 0.7770	Loss_G: 1.6995	D(x): 0.5645	D(G(z)): 0.0817 / 0.2326
[2/5][1200/3166]	Loss_D: 0.6637	Loss_G: 2.6497	D(x): 0.8207	D(G(z)): 0.3255 / 0.0921
[2/5][1250/3166]	Loss_D: 0.3866	Loss_G: 2.8384	D(x): 0.8249	D(G(z)): 0.1502 / 0.0740
[2/5][1300/3166]	Loss_D: 0.4930	Loss_G: 2.2011	D(x): 0.7404	D(G(z)): 0.1344 / 0.1445
[2/5][1350/3166]	Loss_D: 1.0743	Loss_G: 0.9291	D(x): 0.4230	D(G(z)): 0.0515 / 0.4568
[2/5][1400/3166]	Loss_D: 0.4596	Loss_G: 2.6504	D(x): 0.7175	D(G(z)): 0.0710 / 0.1123
[2/5][1450/3166]	Loss_D: 0.4997	Loss_G: 3.7303	D(x): 0.8907	D(G(z)): 0.2605 / 0.0353
[2/5][1500/3166]	Loss_D: 0.4314	Loss_G: 2.3251	D(x): 0.7509	D(G(z)): 0.1032 / 0.1205
[2/5][1550/3166]	Loss_D: 1.0834	Loss_G: 2.3701	D(x): 0.4921	D(G(z)): 0.1276 / 0.1532
[2/5][1600/3166]	Loss_D: 0.8213	Loss_G: 1.4896	D(x): 0.5744	D(G(z)): 0.1339 / 0.2629
[2/5][1650/3166]	Loss_D: 0.5494	Loss_G: 1.9493	D(x): 0.6747	D(G(z)): 0.0889 / 0.1754
[2/5][1700/3166]	Loss_D: 0.4048	Loss_G: 3.6324	D(x): 0.9319	D(G(z)): 0.2521 / 0.0399
[2/5][1750/3166]	Loss_D: 0.6468	Loss_G: 1.5268	D(x): 0.6501	D(G(z)): 0.1281 / 0.2673
[2/5][1800/3166]	Loss_D: 0.3547	Loss_G: 3.6106	D(x): 0.9081	D(G(z)): 0.2092 / 0.0384
[2/5][1850/3166]	Loss_D: 0.3416	Loss_G: 3.6218	D(x): 0.9211	D(G(z)): 0.2098 / 0.0373
[2/5][1900/3166]	Loss_D: 0.7223	Loss_G: 2.7011	D(x): 0.6730	D(G(z)): 0.1700 / 0.0983
[2/5][1950/3166]	Loss_D: 0.6198	Loss_G: 1.9064	D(x): 0.7046	D(G(z)): 0.1861 / 0.1892
[2/5][2000/3166]	Loss_D: 0.4505	Loss_G: 2.4006	D(x): 0.7998	D(G(z)): 0.1684 / 0.1128
[2/5][2050/3166]	Loss_D: 0.5943	Loss_G: 1.5861	D(x): 0.6250	D(G(z)): 0.0509 / 0.2679
[2/5][2100/3166]	Loss_D: 0.5992	Loss_G: 2.4228	D(x): 0.6424	D(G(z)): 0.0806 / 0.1468
[2/5][2150/3166]	Loss_D: 0.5091	Loss_G: 2.5352	D(x): 0.7685	D(G(z)): 0.1735 / 0.0992
[2/5][2200/3166]	Loss_D: 0.5503	Loss_G: 2.4584	D(x): 0.6622	D(G(z)): 0.0740 / 0.1212
[2/5][2250/3166]	Loss_D: 0.7241	Loss_G: 1.0942	D(x): 0.5686	D(G(z)): 0.0681 / 0.3769
[2/5][2300/3166]	Loss_D: 0.5968	Loss_G: 1.6153	D(x): 0.7160	D(G(z)): 0.1766 / 0.2402
[2/5][2350/3166]	Loss_D: 0.2717	Loss_G: 3.7202	D(x): 0.9332	D(G(z)): 0.1656 / 0.0379
[2/5][2400/3166]	Loss_D: 0.9407	Loss_G: 4.9947	D(x): 0.9300	D(G(z)): 0.5182 / 0.0097
[2/5][2450/3166]	Loss_D: 0.5677	Loss_G: 3.3061	D(x): 0.7817	D(G(z)): 0.2265 / 0.0535
[2/5][2500/3166]	Loss_D: 0.5343	Loss_G: 3.0287	D(x): 0.8712	D(G(z)): 0.2839 / 0.0705
[2/5][2550/3166]	Loss_D: 0.5636	Loss_G: 2.0262	D(x): 0.6723	D(G(z)): 0.0983 / 0.1779
[2/5][2600/3166]	Loss_D: 0.5139	Loss_G: 3.1984	D(x): 0.8924	D(G(z)): 0.2892 / 0.0541
[2/5][2650/3166]	Loss_D: 0.6075	Loss_G: 3.9065	D(x): 0.8793	D(G(z)): 0.3420 / 0.0289
[2/5][2700/3166]	Loss_D: 0.3438	Loss_G: 3.0037	D(x): 0.8970	D(G(z)): 0.1762 / 0.0673
[2/5][2750/3166]	Loss_D: 0.6793	Loss_G: 5.3888	D(x): 0.9575	D(G(z)): 0.4146 / 0.0075
[2/5][2800/3166]	Loss_D: 0.9888	Loss_G: 3.7763	D(x): 0.8387	D(G(z)): 0.4537 / 0.0434
[2/5][2850/3166]	Loss_D: 0.4214	Loss_G: 3.8356	D(x): 0.9331	D(G(z)): 0.2617 / 0.0302
[2/5][2900/3166]	Loss_D: 0.4894	Loss_G: 2.0122	D(x): 0.7208	D(G(z)): 0.1193 / 0.1626
[2/5][2950/3166]	Loss_D: 0.5242	Loss_G: 5.2094	D(x): 0.9174	D(G(z)): 0.3105 / 0.0108
[2/5][3000/3166]	Loss_D: 0.6143	Loss_G: 4.2766	D(x): 0.9360	D(G(z)): 0.3771 / 0.0199
[2/5][3050/3166]	Loss_D: 0.4720	Loss_G: 2.9640	D(x): 0.8351	D(G(z)): 0.2159 / 0.0703
[2/5][3100/3166]	Loss_D: 0.4160	Loss_G: 3.6432	D(x): 0.9390	D(G(z)): 0.2562 / 0.0380
[2/5][3150/3166]	Loss_D: 0.5682	Loss_G: 2.4607	D(x): 0.6985	D(G(z)): 0.1405 / 0.1170
[3/5][0/3166]	Loss_D: 0.6590	Loss_G: 4.4163	D(x): 0.9365	D(G(z)): 0.3984 / 0.0193
[3/5][50/3166]	Loss_D: 0.4175	Loss_G: 2.3804	D(x): 0.7776	D(G(z)): 0.1026 / 0.1174
[3/5][100/3166]	Loss_D: 0.5835	Loss_G: 2.9101	D(x): 0.8008	D(G(z)): 0.2526 / 0.0690
[3/5][150/3166]	Loss_D: 0.3999	Loss_G: 2.3401	D(x): 0.8193	D(G(z)): 0.1571 / 0.1146
[3/5][200/3166]	Loss_D: 0.2984	Loss_G: 3.7114	D(x): 0.9275	D(G(z)): 0.1812 / 0.0345
[3/5][250/3166]	Loss_D: 0.4833	Loss_G: 4.6016	D(x): 0.9340	D(G(z)): 0.3031 / 0.0145
[3/5][300/3166]	Loss_D: 0.3951	Loss_G: 2.6666	D(x): 0.8464	D(G(z)): 0.1809 / 0.0885
[3/5][350/3166]	Loss_D: 0.3253	Loss_G: 2.9214	D(x): 0.8807	D(G(z)): 0.1572 / 0.0675
[3/5][400/3166]	Loss_D: 0.7100	Loss_G: 3.1842	D(x): 0.8805	D(G(z)): 0.3843 / 0.0568
[3/5][450/3166]	Loss_D: 0.7374	Loss_G: 4.2338	D(x): 0.8857	D(G(z)): 0.4145 / 0.0234
[3/5][500/3166]	Loss_D: 0.5560	Loss_G: 4.5884	D(x): 0.9329	D(G(z)): 0.3392 / 0.0151
[3/5][550/3166]	Loss_D: 0.4587	Loss_G: 2.0960	D(x): 0.7812	D(G(z)): 0.1377 / 0.1604
[3/5][600/3166]	Loss_D: 1.1336	Loss_G: 1.0779	D(x): 0.4026	D(G(z)): 0.0484 / 0.4070
[3/5][650/3166]	Loss_D: 0.5228	Loss_G: 3.2188	D(x): 0.8545	D(G(z)): 0.2528 / 0.0506
[3/5][700/3166]	Loss_D: 1.3274	Loss_G: 1.4248	D(x): 0.3825	D(G(z)): 0.0981 / 0.3262
[3/5][750/3166]	Loss_D: 0.7242	Loss_G: 1.6020	D(x): 0.5601	D(G(z)): 0.0366 / 0.2502
[3/5][800/3166]	Loss_D: 0.4057	Loss_G: 3.0906	D(x): 0.9208	D(G(z)): 0.2470 / 0.0609
[3/5][850/3166]	Loss_D: 1.0266	Loss_G: 5.5280	D(x): 0.9667	D(G(z)): 0.5556 / 0.0080
[3/5][900/3166]	Loss_D: 0.4634	Loss_G: 2.6582	D(x): 0.8344	D(G(z)): 0.2092 / 0.0941
[3/5][950/3166]	Loss_D: 0.3842	Loss_G: 3.1883	D(x): 0.8166	D(G(z)): 0.1413 / 0.0549
[3/5][1000/3166]	Loss_D: 0.4620	Loss_G: 2.6901	D(x): 0.7879	D(G(z)): 0.1556 / 0.0935
[3/5][1050/3166]	Loss_D: 0.5020	Loss_G: 2.2728	D(x): 0.7003	D(G(z)): 0.0850 / 0.1477
[3/5][1100/3166]	Loss_D: 0.6471	Loss_G: 1.6735	D(x): 0.6222	D(G(z)): 0.0584 / 0.2684
[3/5][1150/3166]	Loss_D: 0.3469	Loss_G: 2.7671	D(x): 0.8678	D(G(z)): 0.1582 / 0.0893
[3/5][1200/3166]	Loss_D: 0.3731	Loss_G: 2.8730	D(x): 0.7412	D(G(z)): 0.0359 / 0.0780
[3/5][1250/3166]	Loss_D: 0.7230	Loss_G: 5.6678	D(x): 0.9272	D(G(z)): 0.4235 / 0.0053
[3/5][1300/3166]	Loss_D: 0.4142	Loss_G: 2.7450	D(x): 0.7981	D(G(z)): 0.1365 / 0.0852
[3/5][1350/3166]	Loss_D: 0.2990	Loss_G: 3.3249	D(x): 0.8759	D(G(z)): 0.1335 / 0.0476
[3/5][1400/3166]	Loss_D: 0.2953	Loss_G: 3.1572	D(x): 0.8921	D(G(z)): 0.1369 / 0.0582
[3/5][1450/3166]	Loss_D: 0.6463	Loss_G: 0.7551	D(x): 0.6322	D(G(z)): 0.0886 / 0.5079
[3/5][1500/3166]	Loss_D: 0.9958	Loss_G: 1.4503	D(x): 0.4785	D(G(z)): 0.0448 / 0.3117
[3/5][1550/3166]	Loss_D: 0.3909	Loss_G: 2.3330	D(x): 0.8067	D(G(z)): 0.1166 / 0.1287
[3/5][1600/3166]	Loss_D: 0.4751	Loss_G: 3.7788	D(x): 0.9374	D(G(z)): 0.2801 / 0.0336
[3/5][1650/3166]	Loss_D: 0.5573	Loss_G: 1.8711	D(x): 0.6398	D(G(z)): 0.0485 / 0.2002
[3/5][1700/3166]	Loss_D: 0.3901	Loss_G: 2.5152	D(x): 0.7756	D(G(z)): 0.0720 / 0.1157
[3/5][1750/3166]	Loss_D: 0.2939	Loss_G: 3.1870	D(x): 0.8719	D(G(z)): 0.1281 / 0.0543
[3/5][1800/3166]	Loss_D: 0.3975	Loss_G: 4.2488	D(x): 0.9449	D(G(z)): 0.2617 / 0.0207
[3/5][1850/3166]	Loss_D: 0.4070	Loss_G: 2.9132	D(x): 0.7918	D(G(z)): 0.1262 / 0.0775
[3/5][1900/3166]	Loss_D: 0.4599	Loss_G: 2.1978	D(x): 0.7718	D(G(z)): 0.1433 / 0.1449
[3/5][1950/3166]	Loss_D: 0.4910	Loss_G: 4.0015	D(x): 0.8852	D(G(z)): 0.2699 / 0.0277
[3/5][2000/3166]	Loss_D: 1.1023	Loss_G: 0.6863	D(x): 0.4396	D(G(z)): 0.0307 / 0.5565
[3/5][2050/3166]	Loss_D: 0.3951	Loss_G: 2.7289	D(x): 0.8444	D(G(z)): 0.1743 / 0.0932
[3/5][2100/3166]	Loss_D: 0.4715	Loss_G: 3.5986	D(x): 0.9215	D(G(z)): 0.2861 / 0.0373
[3/5][2150/3166]	Loss_D: 0.9884	Loss_G: 1.5918	D(x): 0.4646	D(G(z)): 0.0495 / 0.2899
[3/5][2200/3166]	Loss_D: 0.8472	Loss_G: 1.8403	D(x): 0.5314	D(G(z)): 0.0735 / 0.2389
[3/5][2250/3166]	Loss_D: 0.8182	Loss_G: 0.9276	D(x): 0.5198	D(G(z)): 0.0607 / 0.4599
[3/5][2300/3166]	Loss_D: 0.3010	Loss_G: 3.5808	D(x): 0.8596	D(G(z)): 0.1172 / 0.0478
[3/5][2350/3166]	Loss_D: 0.3932	Loss_G: 2.8748	D(x): 0.7781	D(G(z)): 0.0857 / 0.0789
[3/5][2400/3166]	Loss_D: 0.8486	Loss_G: 1.9663	D(x): 0.5783	D(G(z)): 0.1219 / 0.2057
[3/5][2450/3166]	Loss_D: 0.3008	Loss_G: 3.3302	D(x): 0.9119	D(G(z)): 0.1758 / 0.0438
[3/5][2500/3166]	Loss_D: 0.8199	Loss_G: 5.1225	D(x): 0.9425	D(G(z)): 0.4641 / 0.0103
[3/5][2550/3166]	Loss_D: 0.4125	Loss_G: 2.5726	D(x): 0.8004	D(G(z)): 0.1325 / 0.1006
[3/5][2600/3166]	Loss_D: 0.5016	Loss_G: 1.9667	D(x): 0.6958	D(G(z)): 0.0815 / 0.1792
[3/5][2650/3166]	Loss_D: 0.6450	Loss_G: 4.2851	D(x): 0.9519	D(G(z)): 0.3875 / 0.0186
[3/5][2700/3166]	Loss_D: 0.5840	Loss_G: 2.0413	D(x): 0.6863	D(G(z)): 0.1154 / 0.1707
[3/5][2750/3166]	Loss_D: 0.3808	Loss_G: 2.6146	D(x): 0.8035	D(G(z)): 0.1176 / 0.0938
[3/5][2800/3166]	Loss_D: 0.3720	Loss_G: 2.5947	D(x): 0.8118	D(G(z)): 0.1261 / 0.0963
[3/5][2850/3166]	Loss_D: 0.6433	Loss_G: 2.4148	D(x): 0.7595	D(G(z)): 0.2522 / 0.1173
[3/5][2900/3166]	Loss_D: 0.4726	Loss_G: 3.2051	D(x): 0.8544	D(G(z)): 0.2401 / 0.0532
[3/5][2950/3166]	Loss_D: 0.5056	Loss_G: 3.4035	D(x): 0.8714	D(G(z)): 0.2699 / 0.0438
[3/5][3000/3166]	Loss_D: 0.9341	Loss_G: 1.0843	D(x): 0.5658	D(G(z)): 0.1600 / 0.4195
[3/5][3050/3166]	Loss_D: 0.5041	Loss_G: 2.0527	D(x): 0.7292	D(G(z)): 0.1349 / 0.1720
[3/5][3100/3166]	Loss_D: 1.0049	Loss_G: 4.2026	D(x): 0.9925	D(G(z)): 0.5597 / 0.0255
[3/5][3150/3166]	Loss_D: 0.3828	Loss_G: 3.9039	D(x): 0.9384	D(G(z)): 0.2352 / 0.0315
[4/5][0/3166]	Loss_D: 0.5428	Loss_G: 2.0206	D(x): 0.6589	D(G(z)): 0.0630 / 0.2100
[4/5][50/3166]	Loss_D: 0.3398	Loss_G: 3.1415	D(x): 0.8927	D(G(z)): 0.1732 / 0.0573
[4/5][100/3166]	Loss_D: 0.6086	Loss_G: 2.8654	D(x): 0.8010	D(G(z)): 0.2550 / 0.0829
[4/5][150/3166]	Loss_D: 0.3528	Loss_G: 2.1001	D(x): 0.8526	D(G(z)): 0.1424 / 0.1637
[4/5][200/3166]	Loss_D: 0.4588	Loss_G: 2.9220	D(x): 0.7589	D(G(z)): 0.1338 / 0.0753
[4/5][250/3166]	Loss_D: 0.6884	Loss_G: 5.4827	D(x): 0.9842	D(G(z)): 0.4282 / 0.0055
[4/5][300/3166]	Loss_D: 0.3960	Loss_G: 2.7679	D(x): 0.7408	D(G(z)): 0.0428 / 0.0924
[4/5][350/3166]	Loss_D: 0.5349	Loss_G: 2.1999	D(x): 0.6907	D(G(z)): 0.0917 / 0.1584
[4/5][400/3166]	Loss_D: 0.4446	Loss_G: 3.3950	D(x): 0.8910	D(G(z)): 0.2518 / 0.0513
[4/5][450/3166]	Loss_D: 0.5923	Loss_G: 2.3101	D(x): 0.6837	D(G(z)): 0.1223 / 0.1555
[4/5][500/3166]	Loss_D: 0.5074	Loss_G: 3.8487	D(x): 0.8997	D(G(z)): 0.2925 / 0.0324
[4/5][550/3166]	Loss_D: 0.6269	Loss_G: 4.6329	D(x): 0.9236	D(G(z)): 0.3701 / 0.0136
[4/5][600/3166]	Loss_D: 0.3890	Loss_G: 4.2085	D(x): 0.9189	D(G(z)): 0.2395 / 0.0217
[4/5][650/3166]	Loss_D: 0.4072	Loss_G: 2.7743	D(x): 0.8504	D(G(z)): 0.1895 / 0.0832
[4/5][700/3166]	Loss_D: 0.3615	Loss_G: 2.7440	D(x): 0.8972	D(G(z)): 0.1969 / 0.0833
[4/5][750/3166]	Loss_D: 0.4852	Loss_G: 1.6305	D(x): 0.7312	D(G(z)): 0.0997 / 0.2538
[4/5][800/3166]	Loss_D: 0.2512	Loss_G: 4.0526	D(x): 0.9541	D(G(z)): 0.1604 / 0.0261
[4/5][850/3166]	Loss_D: 0.6189	Loss_G: 3.1025	D(x): 0.7287	D(G(z)): 0.2099 / 0.0651
[4/5][900/3166]	Loss_D: 0.4371	Loss_G: 2.5419	D(x): 0.8813	D(G(z)): 0.2300 / 0.1112
[4/5][950/3166]	Loss_D: 0.3169	Loss_G: 3.5558	D(x): 0.8344	D(G(z)): 0.1065 / 0.0408
[4/5][1000/3166]	Loss_D: 0.5772	Loss_G: 2.0931	D(x): 0.6430	D(G(z)): 0.0374 / 0.1812
[4/5][1050/3166]	Loss_D: 0.3652	Loss_G: 3.7797	D(x): 0.9172	D(G(z)): 0.2190 / 0.0297
[4/5][1100/3166]	Loss_D: 0.4874	Loss_G: 2.9724	D(x): 0.7642	D(G(z)): 0.1460 / 0.0725
[4/5][1150/3166]	Loss_D: 0.8227	Loss_G: 1.5644	D(x): 0.5705	D(G(z)): 0.1232 / 0.2745
[4/5][1200/3166]	Loss_D: 0.3965	Loss_G: 3.0346	D(x): 0.7808	D(G(z)): 0.1060 / 0.0831
[4/5][1250/3166]	Loss_D: 0.3243	Loss_G: 3.6558	D(x): 0.8945	D(G(z)): 0.1701 / 0.0389
[4/5][1300/3166]	Loss_D: 0.5161	Loss_G: 4.9814	D(x): 0.9796	D(G(z)): 0.3536 / 0.0089
[4/5][1350/3166]	Loss_D: 0.5559	Loss_G: 1.5701	D(x): 0.6762	D(G(z)): 0.0989 / 0.2507
[4/5][1400/3166]	Loss_D: 0.6217	Loss_G: 1.4592	D(x): 0.6929	D(G(z)): 0.1244 / 0.3040
[4/5][1450/3166]	Loss_D: 0.3091	Loss_G: 3.3244	D(x): 0.8487	D(G(z)): 0.0992 / 0.0490
[4/5][1500/3166]	Loss_D: 0.3697	Loss_G: 2.4968	D(x): 0.8305	D(G(z)): 0.1401 / 0.1084
[4/5][1550/3166]	Loss_D: 0.3095	Loss_G: 2.8642	D(x): 0.8814	D(G(z)): 0.1390 / 0.0762
[4/5][1600/3166]	Loss_D: 3.4334	Loss_G: 0.1482	D(x): 0.0719	D(G(z)): 0.0121 / 0.8788
[4/5][1650/3166]	Loss_D: 0.4692	Loss_G: 2.7228	D(x): 0.7751	D(G(z)): 0.1508 / 0.0974
[4/5][1700/3166]	Loss_D: 0.4450	Loss_G: 3.0167	D(x): 0.8357	D(G(z)): 0.1919 / 0.0702
[4/5][1750/3166]	Loss_D: 0.4634	Loss_G: 1.9224	D(x): 0.7049	D(G(z)): 0.0503 / 0.1994
[4/5][1800/3166]	Loss_D: 0.7961	Loss_G: 5.9139	D(x): 0.9622	D(G(z)): 0.4665 / 0.0043
[4/5][1850/3166]	Loss_D: 0.2848	Loss_G: 3.9693	D(x): 0.9035	D(G(z)): 0.1484 / 0.0249
[4/5][1900/3166]	Loss_D: 1.4412	Loss_G: 0.5610	D(x): 0.3947	D(G(z)): 0.1814 / 0.6141
[4/5][1950/3166]	Loss_D: 0.5437	Loss_G: 2.2137	D(x): 0.7374	D(G(z)): 0.1361 / 0.1544
[4/5][2000/3166]	Loss_D: 0.8925	Loss_G: 4.8981	D(x): 0.9622	D(G(z)): 0.5020 / 0.0114
[4/5][2050/3166]	Loss_D: 0.3029	Loss_G: 3.3078	D(x): 0.9171	D(G(z)): 0.1803 / 0.0439
[4/5][2100/3166]	Loss_D: 3.5886	Loss_G: 5.9570	D(x): 0.9940	D(G(z)): 0.9426 / 0.0050
[4/5][2150/3166]	Loss_D: 0.6468	Loss_G: 4.4446	D(x): 0.8676	D(G(z)): 0.3147 / 0.0167
[4/5][2200/3166]	Loss_D: 0.4728	Loss_G: 2.4909	D(x): 0.6923	D(G(z)): 0.0473 / 0.1273
[4/5][2250/3166]	Loss_D: 0.3434	Loss_G: 3.2384	D(x): 0.8978	D(G(z)): 0.1873 / 0.0514
[4/5][2300/3166]	Loss_D: 0.7970	Loss_G: 4.3926	D(x): 0.8998	D(G(z)): 0.4304 / 0.0211
[4/5][2350/3166]	Loss_D: 0.7631	Loss_G: 2.3058	D(x): 0.5922	D(G(z)): 0.0953 / 0.1494
[4/5][2400/3166]	Loss_D: 0.4578	Loss_G: 4.1146	D(x): 0.8869	D(G(z)): 0.2459 / 0.0249
[4/5][2450/3166]	Loss_D: 0.4805	Loss_G: 1.7546	D(x): 0.7521	D(G(z)): 0.1281 / 0.2187
[4/5][2500/3166]	Loss_D: 0.3802	Loss_G: 3.7364	D(x): 0.9435	D(G(z)): 0.2444 / 0.0340
[4/5][2550/3166]	Loss_D: 0.3751	Loss_G: 4.1790	D(x): 0.8797	D(G(z)): 0.1895 / 0.0209
[4/5][2600/3166]	Loss_D: 0.4045	Loss_G: 4.6297	D(x): 0.9277	D(G(z)): 0.2534 / 0.0136
[4/5][2650/3166]	Loss_D: 0.3588	Loss_G: 3.3817	D(x): 0.8298	D(G(z)): 0.1179 / 0.0614
[4/5][2700/3166]	Loss_D: 0.2454	Loss_G: 3.2073	D(x): 0.8621	D(G(z)): 0.0769 / 0.0601
[4/5][2750/3166]	Loss_D: 2.4832	Loss_G: 4.8215	D(x): 0.9533	D(G(z)): 0.8446 / 0.0189
[4/5][2800/3166]	Loss_D: 0.4377	Loss_G: 2.2392	D(x): 0.7213	D(G(z)): 0.0529 / 0.1437
[4/5][2850/3166]	Loss_D: 0.5460	Loss_G: 2.5391	D(x): 0.7676	D(G(z)): 0.2091 / 0.0971
[4/5][2900/3166]	Loss_D: 0.4869	Loss_G: 3.5377	D(x): 0.8627	D(G(z)): 0.2445 / 0.0390
[4/5][2950/3166]	Loss_D: 0.3020	Loss_G: 2.9883	D(x): 0.8112	D(G(z)): 0.0648 / 0.0752
[4/5][3000/3166]	Loss_D: 0.4115	Loss_G: 1.8549	D(x): 0.7239	D(G(z)): 0.0455 / 0.1967
[4/5][3050/3166]	Loss_D: 0.4445	Loss_G: 4.3488	D(x): 0.9556	D(G(z)): 0.3006 / 0.0194
[4/5][3100/3166]	Loss_D: 0.3787	Loss_G: 3.0758	D(x): 0.8032	D(G(z)): 0.1106 / 0.0688
[4/5][3150/3166]	Loss_D: 0.2489	Loss_G: 3.0560	D(x): 0.8768	D(G(z)): 0.0973 / 0.0670
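
To read the log: Loss_D sums the BCE losses on the real and fake batches, D(x) is the discriminator's mean score on real images, and the two D(G(z)) numbers are its mean score on fakes before and after the generator update. At the theoretical equilibrium the discriminator outputs 0.5 everywhere, which fixes the ideal loss values; a small worked check (not from the original notebook):

import math
print(-math.log(0.5))      # ideal Loss_G, about 0.693
print(2 * -math.log(0.5))  # ideal Loss_D, about 1.386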
In [21]:
torch.save(generator.state_dict(), 'generator_dcgan.pt')
torch.save(discriminator.state_dict(), 'discriminator_dcgan.pt')
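
To reuse the saved weights later, the state dict can be loaded into a fresh Generator instance (a minimal sketch, assuming the same class definition and file path as above):

g = Generator().to(device)
g.load_state_dict(torch.load('generator_dcgan.pt', map_location=device))
g.eval()  # put BatchNorm layers into evaluation mode before sampling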
In [22]:
plt.figure(figsize=(10,5))
plt.title("Generator and Discriminator Loss During Training")
plt.plot(G_losses,label="G")
plt.plot(D_losses,label="D")
plt.xlabel("iterations")
plt.ylabel("Loss")
plt.legend()
plt.show()
In [23]:
#%%capture
fig = plt.figure(figsize=(8,8))
plt.axis("off")
ims = [[plt.imshow(np.transpose(i,(1,2,0)), animated=True)] for i in img_list]
ani = animation.ArtistAnimation(fig, ims, interval=1000, repeat_delay=1000, blit=True)

HTML(ani.to_jshtml())
Out[23]:
In [34]:
# Save one of the intermediate image grids to disk
save_image(img_list[1], "img_2.png")
In [26]:
# Grab a batch of real images from the dataloader
real_batch = next(iter(dataloader))

# Plot the real images
plt.figure(figsize=(15,15))
plt.subplot(1,2,1)
plt.axis("off")
plt.title("Real Images")
plt.imshow(np.transpose(utils.make_grid(real_batch[0].to(device)[:64], padding=5, normalize=True).cpu(),(1,2,0)))

# Plot the fake images from the last epoch
plt.subplot(1,2,2)
plt.axis("off")
plt.title("Fake Images")
plt.imshow(np.transpose(img_list[-1],(1,2,0)))
plt.show()
In [ ]: